library(tidyverse) # data manipulation
library(ggpubr) # producing data exploratory plots
library(modelsummary) # descriptive data
library(glmmTMB) # running generalised mixed models
library(DHARMa) # model diagnostics
library(performance) # model diagnostics
library(ggeffects) # partial effect plots
library(car) # running Anova on model
library(emmeans) # post-hoc analysis

# Convert the experimental design / ID columns to factors so downstream
# models (glmmTMB) treat them as categorical rather than numeric/character.
# Uses across() instead of eight repetitive `col = factor(col)` assignments.
df_adults_cleaned <- df_adults |>
  mutate(across(
    c(FISH_ID, Sex, Population, Tank, Chamber, System, Temperature, True_resting),
    factor
  ))
# Split the cleaned adults by sex so male/female traits can be paired up.
df_males   <- df_adults_cleaned |> filter(Sex == "M")
df_females <- df_adults_cleaned |> filter(Sex == "F")

# Pair each male with the female sharing his tank. full_join keeps fish that
# have no opposite-sex tank-mate. NOTE(review): this assumes at most one male
# and one female per tank — multiple same-sex fish in a tank would fan the
# join out; confirm against the husbandry records.
female_traits <- df_females |>
  select(Tank, Temperature, Mass, Resting, Max, AAS, FISH_ID, Sex)

df_adults_cleaned2 <- df_males |>
  full_join(female_traits, by = "Tank") |>
  mutate(
    # Back-fill the male-side (.x) identifiers for female-only rows.
    Temperature.x = coalesce(Temperature.x, Temperature.y),
    FISH_ID.x     = coalesce(FISH_ID.x, FISH_ID.y),
    Sex.x         = coalesce(Sex.x, Sex.y),
    # Mid-parent values: simple average of the male and female measurements.
    Resting.midpoint = (Resting.x + Resting.y) / 2,
    Max.midpoint     = (Max.x + Max.y) / 2,
    AAS.midpoint     = (AAS.x + AAS.y) / 2
  )

# Collapse inconsistent population spellings into one canonical level each.
df_jresp$Population <- fct_collapse(
  df_jresp$Population,
  `Vlassof cay`    = c("Vlassof reef", "Vlassof", "Vlassof Cay", "Vlassof cay"),
  `Arlington reef` = c("Arlington reef", "Arlginton reef")
)
#df_jresp$Female <- fct_collapse(df_jresp$Female,
#`CARL359`= c("CARL359", "CARL59"))
# Juvenile-respirometry working frame: add a combined parental-pair ID
# ("F0" = Male_Female) and factor versions of the design columns.
df_jresp2 <- df_jresp |>
  unite("F0", Male, Female, sep = "_", remove = FALSE) |>
  mutate(
    # NOTE(review): positional `1:7` depends on column order after unite();
    # confirm these really are the intended ID/design columns.
    across(1:7, factor),
    Temperature  = factor(Temperature),
    True_resting = factor(True_resting)
  )
#df_jresp2_rest <- df_jresp2 |>
#filter(True_resting == "Y")

# Attach the male parent's phenotype (.x columns) to each offspring record.
temp2a <- temp1a |>
  left_join(
    df_adults_cleaned2 |>
      select(FISH_ID.x, Sex.x, Resting.x, Max.x, AAS.x, Mass.x),
    by = "FISH_ID.x"
  )

# Attach the female parent's phenotype (.y columns).
temp2b <- temp1b |>
  left_join(
    df_adults_cleaned2 |>
      select(FISH_ID.y, Sex.y, Resting.y, Max.y, AAS.y, Mass.y),
    by = "FISH_ID.y"
  )

# Combine the male- and female-annotated offspring records on clutch/replicate.
df_merged <- temp2a |>
  left_join(
    temp2b |>
      select(Clutch, Replicate, FISH_ID.y, Resting.y, Max.y, AAS.y, Mass.y),
    by = c("Clutch", "Replicate")
  )

# Final analysis frame with explicit MALE / FEMALE / MID parental columns.
df <- df_merged |>
  mutate(
    Resting_MALE = Resting.x,
    Max_MALE     = Max.x,
    AAS_MALE     = AAS.x,
    Mass_MALE    = Mass.x,
    # Per the original inline note, males should carry the .y suffix and
    # females the .x suffix. BUG FIX: the previous code assigned FISH_ID.x
    # to BOTH columns, silently discarding the female's ID; stash the female
    # ID first so the swap is lossless.
    FISH_ID_female_tmp = FISH_ID.y,
    FISH_ID.y = FISH_ID.x,          # males  -> .y
    FISH_ID.x = FISH_ID_female_tmp, # females -> .x
    Resting_FEMALE = Resting.y,
    Max_FEMALE     = Max.y,
    AAS_FEMALE     = AAS.y,
    Mass_FEMALE    = Mass.y
  ) |>
  select(-FISH_ID_female_tmp) |>
  mutate(
    # Mid-parent values (average of the two parents), recomputed for the
    # merged frame.
    Resting_MID = (Resting_MALE + Resting_FEMALE) / 2,
    Max_MID     = (Max_MALE + Max_FEMALE) / 2,
    AAS_MID     = (AAS_MALE + AAS_FEMALE) / 2
  )

# Offspring resting rate vs the male parent's resting rate, per temperature.
# FIX: axis labels now match the aesthetics (x = parent, y = offspring) —
# the original had the two labels swapped.
plot1 <- ggplot(df, aes(x = Resting_MALE, y = Resting, color = Temperature)) +
  stat_smooth(method = "lm") +
  geom_point(alpha = 0.1) +
  ggtitle("Offspring-male relationship") +
  xlab("") +
  ylab("Resting (offspring)") +
  theme_classic() +
  theme(legend.position = "none")
# Exploratory offspring-vs-male-parent panels (Max, AAS; Resting is plot1).
# FIX throughout: axis labels now match the mapped aesthetics — x is the
# parental trait, y is the offspring trait; the original had them swapped.
plot2 <- ggplot(df, aes(x = Max_MALE, y = Max, color = Temperature)) +
  stat_smooth(method = "lm") +
  geom_point(alpha = 0.1) +
  ggtitle("Offspring-male relationship") +
  xlab("") +
  ylab("Max (offspring)") +
  theme_classic() +
  theme(legend.position = "none")
plot3 <- ggplot(df, aes(x = AAS_MALE, y = AAS, color = Temperature)) +
  stat_smooth(method = "lm") +
  geom_point(alpha = 0.1) +
  ggtitle("Offspring-male relationship") +
  xlab("") +
  ylab("AAS (offspring)") +
  theme_classic() +
  theme(legend.position = "none")
# Bottom row: regression lines only (points suppressed), axis titles shown.
plot4 <- ggplot(df, aes(x = Resting_MALE, y = Resting, color = Temperature)) +
  stat_smooth(method = "lm") +
  #geom_point(alpha=0.1) +
  ggtitle("Offspring-male relationship") +
  xlab("Resting (parental-male)") +
  ylab("Resting (offspring)") +
  theme_classic() +
  theme(legend.position = "bottom")
plot5 <- ggplot(df, aes(x = Max_MALE, y = Max, color = Temperature)) +
  stat_smooth(method = "lm") +
  #geom_point(alpha=0.1) +
  ggtitle("Offspring-male relationship") +
  xlab("Max (parental-male)") +
  ylab("Max (offspring)") +
  theme_classic() +
  theme(legend.position = "none")
plot6 <- ggplot(df, aes(x = AAS_MALE, y = AAS, color = Temperature)) +
  stat_smooth(method = "lm") +
  #geom_point(alpha=0.1) +
  ggtitle("Offspring-male relationship") +
  xlab("AAS (parental-male)") +
  ylab("AAS (offspring)") +
  theme_classic() +
  theme(legend.position = "none")
ggarrange(plot1, plot2, plot3,
          plot4, plot5, plot6,
          ncol = 3,
          nrow = 3)
# Offspring resting rate vs the female parent's resting rate (labels fixed
# the same way: x = parent, y = offspring).
plot1 <- ggplot(df, aes(x = Resting_FEMALE, y = Resting, color = Temperature)) +
  stat_smooth(method = "lm") +
  geom_point(alpha = 0.1) +
  ggtitle("Offspring-female relationship") +
  xlab("") +
  ylab("Resting (offspring)") +
  theme_classic() +
  theme(legend.position = "none")
# Exploratory offspring-vs-female-parent panels (Max, AAS; Resting is plot1).
# FIX throughout: axis labels now match the mapped aesthetics — x is the
# parental trait, y is the offspring trait; the original had them swapped.
plot2 <- ggplot(df, aes(x = Max_FEMALE, y = Max, color = Temperature)) +
  stat_smooth(method = "lm") +
  geom_point(alpha = 0.1) +
  ggtitle("Offspring-female relationship") +
  xlab("") +
  ylab("Max (offspring)") +
  theme_classic() +
  theme(legend.position = "none")
plot3 <- ggplot(df, aes(x = AAS_FEMALE, y = AAS, color = Temperature)) +
  stat_smooth(method = "lm") +
  geom_point(alpha = 0.1) +
  ggtitle("Offspring-female relationship") +
  xlab("") +
  ylab("AAS (offspring)") +
  theme_classic() +
  theme(legend.position = "none")
# Bottom row: regression lines only (points suppressed), axis titles shown.
plot4 <- ggplot(df, aes(x = Resting_FEMALE, y = Resting, color = Temperature)) +
  stat_smooth(method = "lm") +
  #geom_point(alpha=0.1) +
  ggtitle("Offspring-female relationship") +
  xlab("Resting (parental-female)") +
  ylab("Resting (offspring)") +
  theme_classic() +
  theme(legend.position = "bottom")
plot5 <- ggplot(df, aes(x = Max_FEMALE, y = Max, color = Temperature)) +
  stat_smooth(method = "lm") +
  #geom_point(alpha=0.1) +
  ggtitle("Offspring-female relationship") +
  xlab("Max (parental-female)") +
  ylab("Max (offspring)") +
  theme_classic() +
  theme(legend.position = "none")
plot6 <- ggplot(df, aes(x = AAS_FEMALE, y = AAS, color = Temperature)) +
  stat_smooth(method = "lm") +
  #geom_point(alpha=0.1) +
  ggtitle("Offspring-female relationship") +
  xlab("AAS (parental-female)") +
  ylab("AAS (offspring)") +
  theme_classic() +
  theme(legend.position = "none")
ggarrange(plot1, plot2, plot3,
          plot4, plot5, plot6,
          ncol = 3,
          nrow = 3)
# Offspring resting rate vs the mid-parent resting value (labels fixed the
# same way: x = parent, y = offspring).
plot1 <- ggplot(df, aes(x = Resting_MID, y = Resting, color = Temperature)) +
  stat_smooth(method = "lm") +
  geom_point(alpha = 0.1) +
  ggtitle("Offspring-midpoint relationship") +
  xlab("") +
  ylab("Resting (offspring)") +
  theme_classic() +
  theme(legend.position = "none")
# Exploratory offspring-vs-mid-parent panels (Max, AAS; Resting is plot1).
# FIX throughout: axis labels now match the mapped aesthetics — x is the
# mid-parent trait, y is the offspring trait; the original had them swapped.
plot2 <- ggplot(df, aes(x = Max_MID, y = Max, color = Temperature)) +
  stat_smooth(method = "lm") +
  geom_point(alpha = 0.1) +
  ggtitle("Offspring-midpoint relationship") +
  xlab("") +
  ylab("Max (offspring)") +
  theme_classic() +
  theme(legend.position = "none")
plot3 <- ggplot(df, aes(x = AAS_MID, y = AAS, color = Temperature)) +
  stat_smooth(method = "lm") +
  geom_point(alpha = 0.1) +
  ggtitle("Offspring-midpoint relationship") +
  xlab("") +
  ylab("AAS (offspring)") +
  theme_classic() +
  theme(legend.position = "none")
# Bottom row: regression lines only (points suppressed), axis titles shown.
plot4 <- ggplot(df, aes(x = Resting_MID, y = Resting, color = Temperature)) +
  stat_smooth(method = "lm") +
  #geom_point(alpha=0.1) +
  ggtitle("Offspring-midpoint relationship") +
  xlab("Resting (parental-midpoint)") + ylab("Resting (offspring)") +
  theme_classic() +
  theme(legend.position = "none")
plot5 <- ggplot(df, aes(x = Max_MID, y = Max, color = Temperature)) +
  stat_smooth(method = "lm") +
  #geom_point(alpha=0.1) +
  ggtitle("Offspring-midpoint relationship") +
  xlab("Max (parental-midpoint)") + ylab("Max (offspring)") +
  theme_classic() +
  theme(legend.position = "none")
plot6 <- ggplot(df, aes(x = AAS_MID, y = AAS, color = Temperature)) +
  stat_smooth(method = "lm") +
  #geom_point(alpha=0.1) +
  ggtitle("Offspring-midpoint relationship") +
  xlab("AAS (parental-midpoint)") + ylab("AAS (offspring)") +
  theme_classic() +
  theme(legend.position = "none")
ggarrange(plot1, plot2, plot3,
          plot4, plot5, plot6,
          ncol = 3,
          nrow = 3,
          common.legend = TRUE)
| Population | 27 | 28.5 | 30 |
|---|---|---|---|
| Arlington reef | 60 | 43 | 53 |
| Pretty patches | 26 | 21 | 34 |
| Sudbury reef | 27 | 15 | 16 |
| Vlassof cay | 26 | 10 | 28 |
| F0 | 27 | 28.5 | 30 |
|---|---|---|---|
| CARL217_CARL226 | 0 | 8 | 0 |
| CARL218_CARL222 | 0 | 0 | 13 |
| CARL230_CARL235 | 14 | 0 | 0 |
| CARL233_CARL215 | 0 | 0 | 8 |
| CARL237_CARL219 | 10 | 0 | 0 |
| CARL241_CARL239 | 15 | 0 | 0 |
| CARL249_CARL360 | 0 | 0 | 8 |
| CARL335_CARL359 | 0 | 14 | 0 |
| CARL338_CARL345 | 0 | 8 | 0 |
| CARL344_CARL370 | 0 | 0 | 16 |
| CARL354_CARL355 | 21 | 0 | 0 |
| CARL360_CARL249 | 0 | 0 | 8 |
| CARL367_CARL363 | 0 | 7 | 0 |
| CARL369_CARL349 | 0 | 6 | 0 |
| CPRE189_CPRE202 | 0 | 0 | 15 |
| CPRE372_CPRE209 | 6 | 0 | 0 |
| CPRE372_CPRE370 | 8 | 0 | 0 |
| CPRE375_CPRE377 | 12 | 0 | 0 |
| CPRE391_CPRE390 | 0 | 0 | 6 |
| CPRE447_CPRE452 | 0 | 0 | 13 |
| CPRE453_CPRE459 | 0 | 7 | 0 |
| CPRE521_CPRE524 | 0 | 7 | 0 |
| CPRE550_CPRE533 | 0 | 7 | 0 |
| CSUD002_CSUD213 | 0 | 8 | 0 |
| CSUD009_CSUD212 | 14 | 0 | 0 |
| CSUD013_CSUD017 | 13 | 0 | 0 |
| CSUD016_CSUD078 | 0 | 7 | 0 |
| CSUD312_CSUD304 | 0 | 0 | 16 |
| CVLA049_CVLA098 | 0 | 10 | 0 |
| CVLA089_CVLA059 | 0 | 0 | 7 |
| CVLA102_CVLA466 | 6 | 0 | 0 |
| CVLA106_CVLA091 | 0 | 0 | 15 |
| CVLA468_CVLA477 | 13 | 0 | 0 |
| CVLA486_CVLA463 | 7 | 0 | 0 |
| CVLA498_CVLA493 | 0 | 0 | 6 |
| Temperature | NUnique | mean | median | min | max | sd | Histogram |
|---|---|---|---|---|---|---|---|
| 27 | 135 | 0.21 | 0.21 | 0.08 | 0.64 | 0.07 | ▃▆▇▅▁▁ |
| 28.5 | 89 | 0.23 | 0.23 | 0.09 | 0.47 | 0.08 | ▂▄▆▆▇▄▁▁ ▁ |
| 30 | 131 | 0.23 | 0.23 | 0.06 | 0.40 | 0.07 | ▁▂▃▆▇▅▆▄▁▁ |
| Temperature | NUnique | mean | median | min | max | sd | Histogram |
|---|---|---|---|---|---|---|---|
| 27 | 132 | 0.58 | 0.55 | 0.27 | 1.66 | 0.19 | ▄▇▆▃▁ |
| 28.5 | 85 | 0.65 | 0.64 | 0.24 | 1.08 | 0.18 | ▂▄▇▇▇▃▃▂▂ |
| 30 | 130 | 0.65 | 0.64 | 0.16 | 1.29 | 0.19 | ▂▃▇▅▆▃▁ |
| Temperature | NUnique | mean | median | min | max | sd | Histogram |
|---|---|---|---|---|---|---|---|
| 27 | 128 | 0.37 | 0.34 | 0.14 | 1.02 | 0.14 | ▃▇▇▄▃▂ |
| 28.5 | 85 | 0.42 | 0.39 | 0.10 | 0.79 | 0.15 | ▁▃▅▇▄▃▅▃▁▁ |
| 30 | 130 | 0.42 | 0.41 | 0.09 | 0.99 | 0.15 | ▁▄▆▅▇▃▁ |
| Population | 27 | 28.5 | 30 |
|---|---|---|---|
| Arlington reef | 8 | 7 | 4 |
| Pretty patches | 4 | 6 | 4 |
| Sudbury reef | 4 | 3 | 2 |
| Vlassof cay | 6 | 2 | 5 |
# Cross-tabulate adult counts: Population rows by Temperature-within-Sex
# columns; fmt "%.0f" prints whole-number cell counts.
datasummary(Factor(Population) ~ Factor(Temperature)*Factor(Sex),
data = df_adults_cleaned,
fmt = "%.0f")| 27 | 28.5 | 30 | ||||
|---|---|---|---|---|---|---|
| Population | F | M | F | M | F | M |
| Arlington reef | 4 | 4 | 2 | 5 | 2 | 2 |
| Pretty patches | 2 | 2 | 3 | 3 | 3 | 1 |
| Sudbury reef | 2 | 2 | 1 | 2 | 1 | 1 |
| Vlassof cay | 3 | 3 | 1 | 1 | 3 | 2 |
Pairs
# Count unique breeding pairs (via distinct AAS.midpoint values) per
# population and temperature in the paired-adults frame.
datasummary(Factor(Population)*Factor(Temperature.x) ~ AAS.midpoint*(NUnique),
data = df_adults_cleaned2,
fmt = "%.0f")| Population | Temperature.x | NUnique |
|---|---|---|
| Arlington reef | 27 | 3 |
| 28.5 | 2 | |
| 30 | 2 | |
| Pretty patches | 27 | 2 |
| 28.5 | 3 | |
| 30 | 1 | |
| Sudbury reef | 27 | 2 |
| 28.5 | 2 | |
| 30 | 1 | |
| Vlassof cay | 27 | 3 |
| 28.5 | 1 | |
| 30 | 2 |
| Temperature | NUnique | mean | median | min | max | sd | Histogram |
|---|---|---|---|---|---|---|---|
| 27 | 22 | 6.29 | 6.06 | 3.82 | 10.09 | 1.56 | ▂▂▃▇▂▂▁▁▁▁ |
| 28.5 | 18 | 6.49 | 6.96 | 4.35 | 8.49 | 1.45 | ▇▂▅▂▃▃▅▃ |
| 30 | 15 | 7.29 | 7.20 | 5.14 | 9.15 | 1.46 | ▅▂▇▂▂▂▇▇ |
| Temperature | NUnique | mean | median | min | max | sd | Histogram |
|---|---|---|---|---|---|---|---|
| 27 | 22 | 16.58 | 16.91 | 9.70 | 22.06 | 3.36 | ▃▃▅▅▃▃▅▇▂ |
| 28.5 | 18 | 17.09 | 17.23 | 11.04 | 28.39 | 3.94 | ▅▂▅▇▇▃▂ |
| 30 | 15 | 16.48 | 16.83 | 11.78 | 21.24 | 2.82 | ▂▃▂▃▇▂▂▃▂ |
| Temperature | NUnique | mean | median | min | max | sd | Histogram |
|---|---|---|---|---|---|---|---|
| 27 | 22 | 10.29 | 10.26 | 3.85 | 16.28 | 3.14 | ▃▁▄▇▃▆▃▃▁ |
| 28.5 | 18 | 10.59 | 9.66 | 6.11 | 20.44 | 3.66 | ▅▅▇▇▃▂▂ |
| 30 | 15 | 9.19 | 9.16 | 4.36 | 12.77 | 2.91 | ▃▂▅▂▂▂▃▇ |
# Candidate random-effect structures for offspring AAS: clutch alone, then
# clutch plus population and/or respirometry chamber, for later comparison.
model1 <- glmmTMB(AAS ~ (1 | Clutch),
                  family = "gaussian",
                  data = df)
model2 <- glmmTMB(AAS ~ (1 | Clutch) + (1 | Population),
                  family = "gaussian",
                  data = df)
model3 <- glmmTMB(AAS ~ (1 | Clutch) + (1 | Chamber),
                  family = "gaussian",
                  data = df)
model4 <- glmmTMB(AAS ~ (1 | Clutch) + (1 | Population) + (1 | Chamber),
                  family = "gaussian",
                  data = df)

After figuring out which random factors will be incorporated into the model we will start to examine our fixed factors. Some fixed factors such as AAS_(FE)MALE and TEMPERATURE will be essential to answering questions we have around heritability. Another factor that will be included is Dry_mass - which it should be pointed out in this experiment refers to the mass of fish after they were blotted dry with paper towel rather than completely dried out. Larger fish consume more oxygen, therefore, we need to account for this known relationship within our model. Our model will look something like this:
If we had alternative hypotheses to test we would do so at this stage. But in this instance the experiment was designed to answer a specific question by limiting potential covariates.
# Male-parent heritability model: male AAS x temperature interaction with
# scaled blotted mass as a covariate, and clutch as a random intercept.
model1.1 <- glmmTMB(
  AAS ~ AAS_MALE * Temperature + scale(Dry_mass) + (1 | Clutch),
  family = "gaussian",
  data = df
)

Great, now let's check how our model performed via model validation techniques
To check our model's performance we will be using two different packages that perform model diagnostics. The packages used here are just examples; there are other packages out there that can provide the same function.
## Object of Class DHARMa with simulated residuals based on 250 simulations with refit = FALSE . See ?DHARMa::simulateResiduals for help.
##
## Scaled residual values: 0.42 0.26 0.24 0.236 0.04 0.956 0.34 0.168 0.704 0.472 0.34 0.48 0.472 0.224 0.568 0.148 0.284 0.536 0.776 0.312 ...
## $uniformity
##
## Asymptotic one-sample Kolmogorov-Smirnov test
##
## data: simulationOutput$scaledResiduals
## D = 0.084, p-value = 0.04127
## alternative hypothesis: two-sided
##
##
## $dispersion
##
## DHARMa nonparametric dispersion test via sd of residuals fitted vs.
## simulated
##
## data: simulationOutput
## dispersion = 0.99749, p-value = 0.976
## alternative hypothesis: two.sided
##
##
## $outliers
##
## DHARMa outlier test based on exact binomial test with approximate
## expectations
##
## data: simulationOutput
## outliers at both margin(s) = 3, observations = 275, p-value = 0.4859
## alternative hypothesis: true probability of success is not equal to 0.007968127
## 95 percent confidence interval:
## 0.002255392 0.031548215
## sample estimates:
## frequency of outliers (expected: 0.00796812749003984 )
## 0.01090909
## $uniformity
##
## Asymptotic one-sample Kolmogorov-Smirnov test
##
## data: simulationOutput$scaledResiduals
## D = 0.084, p-value = 0.04127
## alternative hypothesis: two-sided
##
##
## $dispersion
##
## DHARMa nonparametric dispersion test via sd of residuals fitted vs.
## simulated
##
## data: simulationOutput
## dispersion = 0.99749, p-value = 0.976
## alternative hypothesis: two.sided
##
##
## $outliers
##
## DHARMa outlier test based on exact binomial test with approximate
## expectations
##
## data: simulationOutput
## outliers at both margin(s) = 3, observations = 275, p-value = 0.4859
## alternative hypothesis: true probability of success is not equal to 0.007968127
## 95 percent confidence interval:
## 0.002255392 0.031548215
## sample estimates:
## frequency of outliers (expected: 0.00796812749003984 )
## 0.01090909
## Family: gaussian ( identity )
## Formula: AAS ~ AAS_MALE * Temperature + scale(Dry_mass) + (1 | Clutch)
## Data: df
##
## AIC BIC logLik deviance df.resid
## -390.9 -358.3 204.4 -408.9 266
##
## Random effects:
##
## Conditional model:
## Groups Name Variance Std.Dev.
## Clutch (Intercept) 0.0007387 0.02718
## Residual 0.0125995 0.11225
## Number of obs: 275, groups: Clutch, 42
##
## Dispersion estimate for gaussian family (sigma^2): 0.0126
##
## Conditional model:
## Estimate Std. Error z value Pr(>|z|)
## (Intercept) 0.325579 0.040334 8.072 6.91e-16 ***
## AAS_MALE 0.003783 0.003988 0.949 0.34278
## Temperature28.5 0.099104 0.065889 1.504 0.13256
## Temperature30 0.208721 0.063743 3.274 0.00106 **
## scale(Dry_mass) 0.080935 0.007317 11.062 < 2e-16 ***
## AAS_MALE:Temperature28.5 -0.006174 0.006544 -0.944 0.34540
## AAS_MALE:Temperature30 -0.012165 0.006488 -1.875 0.06082 .
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 2.5 % 97.5 % Estimate
## (Intercept) 0.24652531 0.4046328583 0.325579082
## AAS_MALE -0.00403249 0.0115982776 0.003782894
## Temperature28.5 -0.03003705 0.2282446862 0.099103816
## Temperature30 0.08378613 0.3336556209 0.208720877
## scale(Dry_mass) 0.06659476 0.0952757514 0.080935257
## AAS_MALE:Temperature28.5 -0.01899927 0.0066510253 -0.006174120
## AAS_MALE:Temperature30 -0.02488180 0.0005524731 -0.012164662
## Std.Dev.(Intercept)|Clutch 0.01222338 0.0604326590 0.027178883
# Sidak-adjusted pairwise temperature contrasts on the estimated marginal
# means of AAS; averaged over AAS_MALE, hence the interaction note below.
model1.1 |> emmeans(pairwise ~ Temperature, type="response") |>
summary(by=NULL, adjust="sidak", infer=TRUE)## NOTE: Results may be misleading due to involvement in interactions
## $emmeans
## Temperature emmean SE df lower.CL upper.CL t.ratio p.value
## 27 0.365 0.0117 266 0.337 0.393 31.357 <.0001
## 28.5 0.406 0.0153 266 0.369 0.443 26.457 <.0001
## 30 0.458 0.0163 266 0.419 0.498 28.090 <.0001
##
## Confidence level used: 0.95
## Conf-level adjustment: sidak method for 3 estimates
## P value adjustment: sidak method for 3 tests
##
## $contrasts
## contrast estimate SE df lower.CL upper.CL t.ratio
## Temperature27 - Temperature28.5 -0.0404 0.0192 266 -0.0866 0.00583 -2.100
## Temperature27 - Temperature30 -0.0930 0.0201 266 -0.1413 -0.04475 -4.630
## Temperature28.5 - Temperature30 -0.0526 0.0226 266 -0.1070 0.00175 -2.325
## p.value
## 0.1061
## <.0001
## 0.0611
##
## Confidence level used: 0.95
## Conf-level adjustment: sidak method for 3 estimates
## P value adjustment: sidak method for 3 tests
# Partial-effect grid: predicted AAS across male AAS values 3-16 (step 0.2)
# at each temperature, from model1.1.
om.aas <- emmeans(model1.1, ~AAS_MALE*Temperature,
at =list(AAS_MALE =seq(from=3, to =16, by=.2)))
om.aas.df <- as.data.frame(om.aas)
# Observed data with population-level (re.form = NA) predictions and
# response-scale residuals; Fit = partial residuals (Pred + Resid).
om.aas.obs <- drop_na(df, AAS_MALE, AAS) |>
mutate(Pred =predict(model1.1, re.form =NA, type='response'),
Resid =residuals(model1.1, type ="response"),
Fit =Pred + Resid)
# Clutch-by-temperature means of observed offspring AAS with 95% t-based CIs.
om.aas.obs.summarize <- om.aas.obs |>
group_by(Clutch, Temperature) |>
summarise(mean.aas =mean(AAS, na.rm=TRUE),
mean.aas_male =mean(AAS_MALE, na.rm=TRUE),
sd.aas =sd(AAS, na.rm =TRUE),
n.aas = n()) |>
mutate(se.aas = sd.aas / sqrt(n.aas),
lower.ci.aas =mean.aas - qt(1 - (0.05/2), n.aas -1) * se.aas,
upper.ci.aas =mean.aas + qt(1 - (0.05/2), n.aas - 1) * se.aas)|>
ungroup()## `summarise()` has grouped output by 'Clutch'. You can override using the
## `.groups` argument.
# Overlay the model's marginal trend on the clutch-level observed means,
# faceted by temperature.
ggplot(data =om.aas.df, aes(y=emmean, x=AAS_MALE)) +
stat_smooth(aes(color=Temperature),
method = "lm") +
geom_pointrange(data = om.aas.obs.summarize, aes(y =mean.aas, x=mean.aas_male,
ymin =lower.ci.aas,
ymax =upper.ci.aas, color = Temperature),
alpha =0.2) +
facet_wrap(~Temperature) +
theme_classic() +
theme(legend.position ="bottom")## `geom_smooth()` using formula = 'y ~ x'
# Female-parent heritability model: female AAS x temperature interaction
# with scaled blotted mass as a covariate, and clutch as a random intercept.
fmodel1.1 <- glmmTMB(
  AAS ~ AAS_FEMALE * Temperature + scale(Dry_mass) + (1 | Clutch),
  family = "gaussian",
  data = df
)

Great, now let's check how our model performed via model validation techniques
To check our model's performance we will be using two different packages that perform model diagnostics. The packages used here are just examples; there are other packages out there that can provide the same function.
## Object of Class DHARMa with simulated residuals based on 250 simulations with refit = FALSE . See ?DHARMa::simulateResiduals for help.
##
## Scaled residual values: 0.168 0.596 0.5 0.424 0.496 0.544 0.224 0.652 0.26 0.4 0.556 0.808 0.448 0.076 0.34 0.388 0.42 0.22 0.48 0.468 ...
## $uniformity
##
## Asymptotic one-sample Kolmogorov-Smirnov test
##
## data: simulationOutput$scaledResiduals
## D = 0.079685, p-value = 0.06736
## alternative hypothesis: two-sided
##
##
## $dispersion
##
## DHARMa nonparametric dispersion test via sd of residuals fitted vs.
## simulated
##
## data: simulationOutput
## dispersion = 1.0005, p-value = 0.984
## alternative hypothesis: two.sided
##
##
## $outliers
##
## DHARMa outlier test based on exact binomial test with approximate
## expectations
##
## data: simulationOutput
## outliers at both margin(s) = 4, observations = 267, p-value = 0.1659
## alternative hypothesis: true probability of success is not equal to 0.007968127
## 95 percent confidence interval:
## 0.004096585 0.037911652
## sample estimates:
## frequency of outliers (expected: 0.00796812749003984 )
## 0.01498127
## $uniformity
##
## Asymptotic one-sample Kolmogorov-Smirnov test
##
## data: simulationOutput$scaledResiduals
## D = 0.079685, p-value = 0.06736
## alternative hypothesis: two-sided
##
##
## $dispersion
##
## DHARMa nonparametric dispersion test via sd of residuals fitted vs.
## simulated
##
## data: simulationOutput
## dispersion = 1.0005, p-value = 0.984
## alternative hypothesis: two.sided
##
##
## $outliers
##
## DHARMa outlier test based on exact binomial test with approximate
## expectations
##
## data: simulationOutput
## outliers at both margin(s) = 4, observations = 267, p-value = 0.1659
## alternative hypothesis: true probability of success is not equal to 0.007968127
## 95 percent confidence interval:
## 0.004096585 0.037911652
## sample estimates:
## frequency of outliers (expected: 0.00796812749003984 )
## 0.01498127
## Family: gaussian ( identity )
## Formula:
## AAS ~ AAS_FEMALE * Temperature + scale(Dry_mass) + (1 | Clutch)
## Data: df
##
## AIC BIC logLik deviance df.resid
## -359.9 -327.6 188.9 -377.9 258
##
## Random effects:
##
## Conditional model:
## Groups Name Variance Std.Dev.
## Clutch (Intercept) 0.000972 0.03118
## Residual 0.013403 0.11577
## Number of obs: 267, groups: Clutch, 41
##
## Dispersion estimate for gaussian family (sigma^2): 0.0134
##
## Conditional model:
## Estimate Std. Error z value Pr(>|z|)
## (Intercept) 0.3563550 0.0425236 8.380 <2e-16 ***
## AAS_FEMALE 0.0006821 0.0038427 0.178 0.859
## Temperature28.5 -0.0424847 0.0818289 -0.519 0.604
## Temperature30 0.0781039 0.0737486 1.059 0.290
## scale(Dry_mass) 0.0874917 0.0079082 11.063 <2e-16 ***
## AAS_FEMALE:Temperature28.5 0.0057877 0.0066839 0.866 0.387
## AAS_FEMALE:Temperature30 0.0011157 0.0072302 0.154 0.877
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 2.5 % 97.5 % Estimate
## (Intercept) 0.273010282 0.439699745 0.3563550133
## AAS_FEMALE -0.006849404 0.008213582 0.0006820893
## Temperature28.5 -0.202866442 0.117896991 -0.0424847254
## Temperature30 -0.066440831 0.222648535 0.0781038519
## scale(Dry_mass) 0.071991797 0.102991558 0.0874916780
## AAS_FEMALE:Temperature28.5 -0.007312555 0.018888029 0.0057877369
## AAS_FEMALE:Temperature30 -0.013055304 0.015286728 0.0011157124
## Std.Dev.(Intercept)|Clutch 0.015529117 0.062593217 0.0311771936
# Partial-effect grid: predicted AAS across female AAS values 2-22 (step 0.2)
# at each temperature, from fmodel1.1.
om.aas <- emmeans(fmodel1.1, ~AAS_FEMALE*Temperature,
at =list(AAS_FEMALE =seq(from=2, to =22, by=.2)))
om.aas.df <- as.data.frame(om.aas)
# Observed data with population-level (re.form = NA) predictions and
# response-scale residuals; Fit = partial residuals (Pred + Resid).
om.aas.obs <- drop_na(df, AAS_FEMALE, AAS) |>
mutate(Pred =predict(fmodel1.1, re.form =NA, type='response'),
Resid =residuals(fmodel1.1, type ="response"),
Fit =Pred + Resid)
# Clutch-by-temperature means of observed offspring AAS with 95% t-based CIs.
om.aas.obs.summarize <- om.aas.obs |>
group_by(Clutch, Temperature) |>
summarise(mean.aas =mean(AAS, na.rm=TRUE),
mean.aas_female =mean(AAS_FEMALE, na.rm=TRUE),
sd.aas =sd(AAS, na.rm =TRUE),
n.aas = n()) |>
mutate(se.aas = sd.aas / sqrt(n.aas),
lower.ci.aas =mean.aas - qt(1 - (0.05/2), n.aas -1) * se.aas,
upper.ci.aas =mean.aas + qt(1 - (0.05/2), n.aas - 1) * se.aas)|>
ungroup()## `summarise()` has grouped output by 'Clutch'. You can override using the
## `.groups` argument.
# Overlay the model's marginal trend on the clutch-level observed means,
# faceted by temperature.
ggplot(data =om.aas.df, aes(y=emmean, x=AAS_FEMALE)) +
stat_smooth(aes(color=Temperature),
method = "lm") +
geom_pointrange(data = om.aas.obs.summarize, aes(y =mean.aas, x=mean.aas_female,
ymin =lower.ci.aas,
ymax =upper.ci.aas, color = Temperature),
alpha =0.2) +
facet_wrap(~Temperature) +
theme_classic() +
theme(legend.position ="bottom")## `geom_smooth()` using formula = 'y ~ x'
# Mid-parent heritability model: mid-parent AAS x temperature interaction
# with scaled blotted mass as a covariate, and clutch as a random intercept.
mid_model1.1 <- glmmTMB(
  AAS ~ AAS_MID * Temperature + scale(Dry_mass) + (1 | Clutch),
  family = "gaussian",
  data = df
)

Great, now let's check how our model performed via model validation techniques
To check our model's performance we will be using two different packages that perform model diagnostics. The packages used here are just examples; there are other packages out there that can provide the same function.
## Object of Class DHARMa with simulated residuals based on 250 simulations with refit = FALSE . See ?DHARMa::simulateResiduals for help.
##
## Scaled residual values: 0.188 0.6 0.492 0.4 0.564 0.488 0.224 0.608 0.188 0.34 0.568 0.82 0.392 0.052 0.348 0.392 0.408 0.172 0.428 0.544 ...
## $uniformity
##
## Asymptotic one-sample Kolmogorov-Smirnov test
##
## data: simulationOutput$scaledResiduals
## D = 0.086142, p-value = 0.05762
## alternative hypothesis: two-sided
##
##
## $dispersion
##
## DHARMa nonparametric dispersion test via sd of residuals fitted vs.
## simulated
##
## data: simulationOutput
## dispersion = 1.0049, p-value = 0.944
## alternative hypothesis: two.sided
##
##
## $outliers
##
## DHARMa outlier test based on exact binomial test with approximate
## expectations
##
## data: simulationOutput
## outliers at both margin(s) = 2, observations = 239, p-value = 0.7163
## alternative hypothesis: true probability of success is not equal to 0.007968127
## 95 percent confidence interval:
## 0.001015039 0.029900205
## sample estimates:
## frequency of outliers (expected: 0.00796812749003984 )
## 0.008368201
## $uniformity
##
## Asymptotic one-sample Kolmogorov-Smirnov test
##
## data: simulationOutput$scaledResiduals
## D = 0.086142, p-value = 0.05762
## alternative hypothesis: two-sided
##
##
## $dispersion
##
## DHARMa nonparametric dispersion test via sd of residuals fitted vs.
## simulated
##
## data: simulationOutput
## dispersion = 1.0049, p-value = 0.944
## alternative hypothesis: two.sided
##
##
## $outliers
##
## DHARMa outlier test based on exact binomial test with approximate
## expectations
##
## data: simulationOutput
## outliers at both margin(s) = 2, observations = 239, p-value = 0.7163
## alternative hypothesis: true probability of success is not equal to 0.007968127
## 95 percent confidence interval:
## 0.001015039 0.029900205
## sample estimates:
## frequency of outliers (expected: 0.00796812749003984 )
## 0.008368201
## Family: gaussian ( identity )
## Formula: AAS ~ AAS_MID * Temperature + scale(Dry_mass) + (1 | Clutch)
## Data: df
##
## AIC BIC logLik deviance df.resid
## -346.5 -315.2 182.3 -364.5 230
##
## Random effects:
##
## Conditional model:
## Groups Name Variance Std.Dev.
## Clutch (Intercept) 0.0007871 0.02806
## Residual 0.0120687 0.10986
## Number of obs: 239, groups: Clutch, 37
##
## Dispersion estimate for gaussian family (sigma^2): 0.0121
##
## Conditional model:
## Estimate Std. Error z value Pr(>|z|)
## (Intercept) 0.3310710 0.0463907 7.137 9.57e-13 ***
## AAS_MID 0.0032718 0.0044675 0.732 0.4640
## Temperature28.5 0.0178173 0.0952662 0.187 0.8516
## Temperature30 0.3118662 0.1360477 2.292 0.0219 *
## scale(Dry_mass) 0.0830630 0.0079271 10.478 < 2e-16 ***
## AAS_MID:Temperature28.5 0.0006733 0.0082425 0.082 0.9349
## AAS_MID:Temperature30 -0.0231143 0.0143845 -1.607 0.1081
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 2.5 % 97.5 % Estimate
## (Intercept) 0.240146844 0.421995102 0.3310709731
## AAS_MID -0.005484411 0.012027982 0.0032717855
## Temperature28.5 -0.168900938 0.204535637 0.0178173495
## Temperature30 0.045217709 0.578514730 0.3118662193
## scale(Dry_mass) 0.067526170 0.098599850 0.0830630098
## AAS_MID:Temperature28.5 -0.015481591 0.016828280 0.0006733442
## AAS_MID:Temperature30 -0.051307394 0.005078864 -0.0231142650
## Std.Dev.(Intercept)|Clutch 0.012545571 0.062742643 0.0280560560
# Partial-effect grid: predicted AAS across mid-parent AAS values 2-18
# (step 0.2) at each temperature, from mid_model1.1.
om.aas <- emmeans(mid_model1.1, ~ AAS_MID * Temperature,
                  at = list(AAS_MID = seq(from = 2, to = 18, by = .2)))
om.aas.df <- as.data.frame(om.aas)
# Observed data with population-level (re.form = NA) predictions and
# response-scale residuals; Fit = partial residuals (Pred + Resid).
om.aas.obs <- drop_na(df, AAS_MID, AAS) |>
  mutate(Pred = predict(mid_model1.1, re.form = NA, type = "response"),
         Resid = residuals(mid_model1.1, type = "response"),
         Fit = Pred + Resid)
# Clutch-by-temperature means of observed offspring AAS with 95% t-based CIs.
# FIX: renamed `mean.aas_female` -> `mean.aas_mid` — the column holds the
# mid-parent (AAS_MID) mean, not the female parent's; the old name was a
# copy-paste leftover from the female-parent analysis.
om.aas.obs.summarize <- om.aas.obs |>
  group_by(Clutch, Temperature) |>
  summarise(mean.aas = mean(AAS, na.rm = TRUE),
            mean.aas_mid = mean(AAS_MID, na.rm = TRUE),
            sd.aas = sd(AAS, na.rm = TRUE),
            n.aas = n()) |>
  mutate(se.aas = sd.aas / sqrt(n.aas),
         lower.ci.aas = mean.aas - qt(1 - (0.05/2), n.aas - 1) * se.aas,
         upper.ci.aas = mean.aas + qt(1 - (0.05/2), n.aas - 1) * se.aas) |>
  ungroup()## `summarise()` has grouped output by 'Clutch'. You can override using the
## `.groups` argument.
# Overlay the model's marginal trend on the clutch-level observed means,
# faceted by temperature.
ggplot(data = om.aas.df, aes(y = emmean, x = AAS_MID)) +
  stat_smooth(aes(color = Temperature),
              method = "lm") +
  geom_pointrange(data = om.aas.obs.summarize,
                  aes(y = mean.aas, x = mean.aas_mid,
                      ymin = lower.ci.aas,
                      ymax = upper.ci.aas, color = Temperature),
                  alpha = 0.2) +
  facet_wrap(~Temperature) +
  theme_classic() +
  theme(legend.position = "bottom")## `geom_smooth()` using formula = 'y ~ x'